In [1]:
from Poisson.autoencoder import AnalyticAutoEncoder

# Build and train the baseline autoencoder.
# NOTE(review): no sigma argument is passed here — presumably this trains on
# clean (noise-free) data, consistent with the sigma=1.0 cells below; confirm.
aae = AnalyticAutoEncoder(epochs=25, lr=0.001)
aae.train(verbose=2, seed=2)  # fixed seed for reproducibility
training...
Epoch 1/25
48/48 - 0s - loss: 9.9090 - val_loss: 3.9372
Epoch 2/25
48/48 - 0s - loss: 2.6747 - val_loss: 2.1019
Epoch 3/25
48/48 - 0s - loss: 1.6913 - val_loss: 1.5315
Epoch 4/25
48/48 - 0s - loss: 1.3617 - val_loss: 1.2938
Epoch 5/25
48/48 - 0s - loss: 1.1623 - val_loss: 1.0980
Epoch 6/25
48/48 - 0s - loss: 0.9757 - val_loss: 0.9044
Epoch 7/25
48/48 - 0s - loss: 0.7947 - val_loss: 0.7280
Epoch 8/25
48/48 - 0s - loss: 0.6279 - val_loss: 0.5618
Epoch 9/25
48/48 - 0s - loss: 0.4768 - val_loss: 0.4174
Epoch 10/25
48/48 - 0s - loss: 0.3468 - val_loss: 0.2969
Epoch 11/25
48/48 - 0s - loss: 0.2409 - val_loss: 0.1996
Epoch 12/25
48/48 - 0s - loss: 0.1584 - val_loss: 0.1269
Epoch 13/25
48/48 - 0s - loss: 0.0985 - val_loss: 0.0767
Epoch 14/25
48/48 - 0s - loss: 0.0579 - val_loss: 0.0440
Epoch 15/25
48/48 - 0s - loss: 0.0322 - val_loss: 0.0236
Epoch 16/25
48/48 - 0s - loss: 0.0169 - val_loss: 0.0119
Epoch 17/25
48/48 - 0s - loss: 0.0084 - val_loss: 0.0057
Epoch 18/25
48/48 - 0s - loss: 0.0040 - val_loss: 0.0026
Epoch 19/25
48/48 - 0s - loss: 0.0018 - val_loss: 0.0012
Epoch 20/25
48/48 - 0s - loss: 7.6862e-04 - val_loss: 4.8258e-04
Epoch 21/25
48/48 - 0s - loss: 3.1492e-04 - val_loss: 1.9352e-04
Epoch 22/25
48/48 - 0s - loss: 1.2441e-04 - val_loss: 7.4062e-05
Epoch 23/25
48/48 - 0s - loss: 4.7925e-05 - val_loss: 2.7667e-05
Epoch 24/25
48/48 - 0s - loss: 1.8235e-05 - val_loss: 1.0765e-05
Epoch 25/25
48/48 - 0s - loss: 7.3922e-06 - val_loss: 4.5636e-06
--------
Reconstructed Phi MSE
Training: 4.744905254483456e-06
Validation: 4.563641141430708e-06
Test: 4.234734660713002e-06
--------
Latent theta MSE: [2.93000189e-03 1.99665479e-05 2.01951926e-05]
In [2]:
aae.plot_theta_fit()  # theta fit on default (no sigma passed) data — see MSEs in output
theta MSE: [2.93000189e-03 1.99665479e-05 2.01951926e-05]
transformed theta MSE: [3.05743507e-07 1.83504352e-07 1.58667190e-07]
In [3]:
aae.plot_solution_fit()  # reconstructed Phi fit on default (no sigma passed) data
Latent theta MSE: [2.93000189e-03 1.99665479e-05 2.01951926e-05]
Reconstructed Phi MSE: 4.2347390205426525e-06
In [4]:
aae.plot_theta_fit(sigma=1.0)  # same fit with noise level sigma=1.0 — theta MSE degrades sharply (see output)
theta MSE: [13.58995288  2.28912929  3.22476726]
transformed theta MSE: [0.00149237 0.03410192 0.05217737]
In [5]:
aae.plot_solution_fit(sigma=1.0)  # Phi reconstruction at sigma=1.0 — MSE jumps from ~4e-6 to ~2.7 (see output)
Latent theta MSE: [13.58995288  2.28912929  3.22476726]
Reconstructed Phi MSE: 2.698988241698414

Bootstrapping

Train on clean, test on noisy

In [6]:
# Bootstrap with 100 resamples: train on clean data (train_sigma=0),
# evaluate on noisy data (test_sigma=1.0).
num_boots = 100
aae.bootstrap(num_boots, train_sigma=0, test_sigma=1.0)
Bootstrapping with 100 boot samples
done
In [7]:
aae.plot_theta_boot()  # bootstrap results for theta (clean-train / noisy-test model)
In [8]:
aae.plot_solution_boot()  # bootstrap results for the reconstructed solution (clean-train / noisy-test model)

Train on noisy, test on clean

In [9]:
# Second model: bootstrap trained on noisy data (train_sigma=1.0),
# evaluated on clean data (test_sigma=0).
aae2 = AnalyticAutoEncoder(epochs=25, lr=0.001)
aae2.bootstrap(num_boots, train_sigma=1.0, test_sigma=0)
Bootstrapping with 100 boot samples
done
In [10]:
# test on clean data (aae2 was bootstrapped with test_sigma=0, not noisy)
aae2.plot_theta_boot()
In [11]:
# test on clean data (aae2 was bootstrapped with test_sigma=0, not noisy)
aae2.plot_solution_boot()

Train and test on noisy

In [12]:
# Third model: bootstrap with noise in both phases
# (train_sigma=1.0 and test_sigma=1.0).
aae3 = AnalyticAutoEncoder(epochs=25, lr=0.001)
aae3.bootstrap(num_boots, train_sigma=1.0, test_sigma=1.0)
Bootstrapping with 100 boot samples
done
In [13]:
aae3.plot_theta_boot()  # bootstrap theta results (noisy-train / noisy-test model)
In [14]:
aae3.plot_solution_boot()  # bootstrap solution results (noisy-train / noisy-test model)

Compare models

In [15]:
aae.plot_solution_boot()  # model 1: trained clean (train_sigma=0), tested noisy (test_sigma=1.0)
In [16]:
aae2.plot_solution_boot()  # model 2: trained noisy (train_sigma=1.0), tested clean (test_sigma=0)
In [17]:
aae3.plot_solution_boot()  # model 3: trained and tested noisy (sigma=1.0 both)